# Mandatory dependencies: fail loudly if missing. library() errors when a
# package is unavailable, whereas require() only returns FALSE and lets the
# script continue into confusing downstream failures.
library(ISLR)   # Auto data set
library(e1071)  # svm(), tune()

(a) Create a binary variable, mpgh, equal to 1 for cars with mpg above the median and 0 otherwise.

data(Auto)
# Binary response: 1 when a car's mpg exceeds the sample median, else 0.
mpgh <- as.numeric(Auto$mpg > median(Auto$mpg))
# Append it to Auto as a factor so svm() treats this as classification.
Auto <- data.frame(Auto, mpgh = as.factor(mpgh))

(b) Fit a support vector classifier with a linear kernel, selecting cost by 10-fold cross-validation.

# Cross-validate a linear-kernel SVM over a grid of cost values.
# tune() runs 10-fold CV by default. Note: `costs` is reused by the
# radial and polynomial searches below, so the name must stay.
set.seed(3255)
costs <- c(0.01, 0.1, 1, 5, 10, 100)
tune.out <- tune(
  svm, mpgh ~ ., data = Auto,
  kernel = "linear",
  ranges = list(cost = costs)
)
print(summary(tune.out))
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##  cost
##     1
## 
## - best performance: 0.01282051 
## 
## - Detailed performance results:
##    cost      error dispersion
## 1 1e-02 0.07384615 0.04219942
## 2 1e-01 0.04083333 0.03008810
## 3 1e+00 0.01282051 0.02179068
## 4 5e+00 0.01538462 0.02477158
## 5 1e+01 0.02044872 0.02354784
## 6 1e+02 0.03070513 0.02357884
# Extract the CV-selected linear model (cost = 1 per the transcript above).
# Use `<-` for assignment, consistent with the rest of the script.
bestmod_lin <- tune.out$best.model
print(summary(bestmod_lin))
## 
## Call:
## best.tune(method = svm, train.x = mpgh ~ ., data = Auto, ranges = list(cost = costs), 
##     kernel = "linear")
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  linear 
##        cost:  1 
##       gamma:  0.003205128 
## 
## Number of Support Vectors:  56
## 
##  ( 26 30 )
## 
## 
## Number of Classes:  2 
## 
## Levels: 
##  0 1

(c) Repeat the tuning with radial and polynomial kernels, cross-validating over gamma and degree as well as cost.

print("tuning radial")
## [1] "tuning radial"
# Radial kernel: tune jointly over cost and gamma (10-fold CV).
gammas <- c(0.5, 1, 2, 3, 4)
set.seed(21)
tune.out <- tune(
  svm, mpgh ~ ., data = Auto,
  kernel = "radial",
  ranges = list(cost = costs, gamma = gammas)
)
print(summary(tune.out))
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##  cost gamma
##     1   0.5
## 
## - best performance: 0.05083333 
## 
## - Detailed performance results:
##     cost gamma      error dispersion
## 1  1e-02   0.5 0.58673077 0.07310319
## 2  1e-01   0.5 0.08391026 0.04089684
## 3  1e+00   0.5 0.05083333 0.03959633
## 4  5e+00   0.5 0.05339744 0.03666015
## 5  1e+01   0.5 0.05596154 0.03738529
## 6  1e+02   0.5 0.05596154 0.03738529
## 7  1e-02   1.0 0.58673077 0.07310319
## 8  1e-01   1.0 0.58673077 0.07310319
## 9  1e+00   1.0 0.06615385 0.04511007
## 10 5e+00   1.0 0.06621795 0.04193487
## 11 1e+01   1.0 0.06621795 0.04193487
## 12 1e+02   1.0 0.06621795 0.04193487
## 13 1e-02   2.0 0.58673077 0.07310319
## 14 1e-01   2.0 0.58673077 0.07310319
## 15 1e+00   2.0 0.15807692 0.05964195
## 16 5e+00   2.0 0.15044872 0.05151660
## 17 1e+01   2.0 0.15044872 0.05151660
## 18 1e+02   2.0 0.15044872 0.05151660
## 19 1e-02   3.0 0.58673077 0.07310319
## 20 1e-01   3.0 0.58673077 0.07310319
## 21 1e+00   3.0 0.40782051 0.18590107
## 22 5e+00   3.0 0.38750000 0.17430064
## 23 1e+01   3.0 0.38750000 0.17430064
## 24 1e+02   3.0 0.38750000 0.17430064
## 25 1e-02   4.0 0.58673077 0.07310319
## 26 1e-01   4.0 0.58673077 0.07310319
## 27 1e+00   4.0 0.52282051 0.07610630
## 28 5e+00   4.0 0.51519231 0.07882041
## 29 1e+01   4.0 0.51519231 0.07882041
## 30 1e+02   4.0 0.51519231 0.07882041
print("best model for radial")
## [1] "best model for radial"
# Extract the CV-selected radial model (cost = 1, gamma = 0.5).
# Use `<-` for assignment, consistent with the rest of the script.
bestmod_rad <- tune.out$best.model
print(summary(bestmod_rad))
## 
## Call:
## best.tune(method = svm, train.x = mpgh ~ ., data = Auto, ranges = list(cost = costs, 
##     gamma = gammas), kernel = "radial")
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  radial 
##        cost:  1 
##       gamma:  0.5 
## 
## Number of Support Vectors:  276
## 
##  ( 136 140 )
## 
## 
## Number of Classes:  2 
## 
## Levels: 
##  0 1
print("tuning polynomial")
## [1] "tuning polynomial"
# Polynomial kernel: tune over cost, gamma, and polynomial degree.
gammas <- c(0.5, 1, 2, 3, 4)
degs <- c(2, 3)
set.seed(463)
# BUG FIX: svm()'s argument is `degree` (singular). The original passed
# `degrees=`, which svm() silently ignored via `...` — visible in the
# transcript below, where the error column is identical for "degrees" 2
# and 3 and the reported best model carries the default degree 3.
tune.out <- tune(svm, mpgh ~ ., data = Auto, kernel = "polynomial",
                 ranges = list(cost = costs, gamma = gammas, degree = degs))
print(summary(tune.out))
## 
## Parameter tuning of 'svm':
## 
## - sampling method: 10-fold cross validation 
## 
## - best parameters:
##  cost gamma degrees
##     5   0.5       2
## 
## - best performance: 0.03314103 
## 
## - Detailed performance results:
##     cost gamma degrees      error dispersion
## 1  1e-02   0.5       2 0.07647436 0.03973675
## 2  1e-01   0.5       2 0.04589744 0.02906486
## 3  1e+00   0.5       2 0.03583333 0.02480189
## 4  5e+00   0.5       2 0.03314103 0.02942215
## 5  1e+01   0.5       2 0.03314103 0.02942215
## 6  1e+02   0.5       2 0.03314103 0.02942215
## 7  1e-02   1.0       2 0.04846154 0.02547525
## 8  1e-01   1.0       2 0.03833333 0.02187600
## 9  1e+00   1.0       2 0.03314103 0.02942215
## 10 5e+00   1.0       2 0.03314103 0.02942215
## 11 1e+01   1.0       2 0.03314103 0.02942215
## 12 1e+02   1.0       2 0.03314103 0.02942215
## 13 1e-02   2.0       2 0.03833333 0.02187600
## 14 1e-01   2.0       2 0.03314103 0.02942215
## 15 1e+00   2.0       2 0.03314103 0.02942215
## 16 5e+00   2.0       2 0.03314103 0.02942215
## 17 1e+01   2.0       2 0.03314103 0.02942215
## 18 1e+02   2.0       2 0.03314103 0.02942215
## 19 1e-02   3.0       2 0.04083333 0.02729035
## 20 1e-01   3.0       2 0.03314103 0.02942215
## 21 1e+00   3.0       2 0.03314103 0.02942215
## 22 5e+00   3.0       2 0.03314103 0.02942215
## 23 1e+01   3.0       2 0.03314103 0.02942215
## 24 1e+02   3.0       2 0.03314103 0.02942215
## 25 1e-02   4.0       2 0.03314103 0.02942215
## 26 1e-01   4.0       2 0.03314103 0.02942215
## 27 1e+00   4.0       2 0.03314103 0.02942215
## 28 5e+00   4.0       2 0.03314103 0.02942215
## 29 1e+01   4.0       2 0.03314103 0.02942215
## 30 1e+02   4.0       2 0.03314103 0.02942215
## 31 1e-02   0.5       3 0.07647436 0.03973675
## 32 1e-01   0.5       3 0.04589744 0.02906486
## 33 1e+00   0.5       3 0.03583333 0.02480189
## 34 5e+00   0.5       3 0.03314103 0.02942215
## 35 1e+01   0.5       3 0.03314103 0.02942215
## 36 1e+02   0.5       3 0.03314103 0.02942215
## 37 1e-02   1.0       3 0.04846154 0.02547525
## 38 1e-01   1.0       3 0.03833333 0.02187600
## 39 1e+00   1.0       3 0.03314103 0.02942215
## 40 5e+00   1.0       3 0.03314103 0.02942215
## 41 1e+01   1.0       3 0.03314103 0.02942215
## 42 1e+02   1.0       3 0.03314103 0.02942215
## 43 1e-02   2.0       3 0.03833333 0.02187600
## 44 1e-01   2.0       3 0.03314103 0.02942215
## 45 1e+00   2.0       3 0.03314103 0.02942215
## 46 5e+00   2.0       3 0.03314103 0.02942215
## 47 1e+01   2.0       3 0.03314103 0.02942215
## 48 1e+02   2.0       3 0.03314103 0.02942215
## 49 1e-02   3.0       3 0.04083333 0.02729035
## 50 1e-01   3.0       3 0.03314103 0.02942215
## 51 1e+00   3.0       3 0.03314103 0.02942215
## 52 5e+00   3.0       3 0.03314103 0.02942215
## 53 1e+01   3.0       3 0.03314103 0.02942215
## 54 1e+02   3.0       3 0.03314103 0.02942215
## 55 1e-02   4.0       3 0.03314103 0.02942215
## 56 1e-01   4.0       3 0.03314103 0.02942215
## 57 1e+00   4.0       3 0.03314103 0.02942215
## 58 5e+00   4.0       3 0.03314103 0.02942215
## 59 1e+01   4.0       3 0.03314103 0.02942215
## 60 1e+02   4.0       3 0.03314103 0.02942215
# Typo fix: "polynomail" -> "polynomial" in the status message,
# and `<-` for assignment, consistent with the rest of the script.
print("best model for polynomial")
## [1] "best model for polynomial"
bestmod_poly <- tune.out$best.model
print(summary(bestmod_poly))
## 
## Call:
## best.tune(method = svm, train.x = mpgh ~ ., data = Auto, ranges = list(cost = costs, 
##     gamma = gammas, degrees = degs), kernel = "polynomial")
## 
## 
## Parameters:
##    SVM-Type:  C-classification 
##  SVM-Kernel:  polynomial 
##        cost:  5 
##      degree:  3 
##       gamma:  0.5 
##      coef.0:  0 
## 
## Number of Support Vectors:  78
## 
##  ( 43 35 )
## 
## 
## Number of Classes:  2 
## 
## Levels: 
##  0 1

(d) Plot each fitted model against pairs of variables.

Define a function that iteratively plots the fitted SVM against each predictor paired with mpg.

#' Plot an SVM fit against every predictor, each paired with mpg.
#'
#' Skips the response (mpgh), mpg itself as a loop variable, and the
#' non-numeric car-name column. Generalized to accept the data frame as an
#' argument (defaulting to the global Auto, so existing calls still work).
#'
#' @param fit A fitted svm object from e1071.
#' @param data The data frame the model was fit on (default: Auto).
#' @return Invisibly NULL; called for its plotting side effect.
plotpairs <- function(fit, data = Auto) {
  predictors <- setdiff(names(data), c("mpg", "mpgh", "name"))
  for (pred in predictors) {
    # plot.svm wants a formula naming the two axes of the 2-D slice.
    plot(fit, data, as.formula(paste0("mpg~", pred)))
  }
  invisible(NULL)
}
# Decision-region plots for each tuned kernel, one panel per predictor.
plotpairs(bestmod_lin)

plotpairs(bestmod_rad)

plotpairs(bestmod_poly)